import numpy as np
import pandas as pd
import biocircuits
import scipy.integrate
import scipy.signal
import bokeh.io
bokeh.io.output_notebook()
import panel as pn
pn.extension()
def style(p, autohide=False):
    """Apply the notebook's shared Bokeh figure styling.

    Sets Helvetica fonts, a centered 16px title, 13px axis labels, and a
    transparent plot background; optionally enables toolbar autohide.
    Returns the same figure object so calls can be chained.
    """
    p.title.text_font = "Helvetica"
    p.title.text_font_size = "16px"
    p.title.align = "center"
    # Both axes get identical label styling, so configure them in one pass.
    for axis in (p.xaxis, p.yaxis):
        axis.axis_label_text_font = "Helvetica"
        axis.axis_label_text_font_size = "13px"
    p.background_fill_alpha = 0
    if autohide:
        p.toolbar.autohide = True
    return p

This set of inequalities imparts an interesting condition on the dilution/degradation rate. Too low or too high, and we lose bistability. Since everything is a ratio of sorts, I will limit my analysis to letting everything in the dilution term go to 1 (so γ1 = 1, kr = 1).
Let's derive another inequality without $\gamma$:
\begin{align} \alpha &< \beta \kappa - \kappa - \beta \\[0.5em] &< (\beta - 1) \kappa - \beta \\[0.5em] &< (\kappa - 1) \beta - \kappa \\[0.5em] \end{align}Both $\kappa$ and $\beta$ need to be greater than 1. We note that there is implicitly an upper bound on leakage. Too much leakage, and the system never reaches a steady state. This matches our intuition.
When $\gamma_0$ is small relative to dilution, i.e., the protein is very stable, $\gamma$ grows larger, which requires that $\kappa$ be larger as well (since $\kappa$ is the scaling term of the upper bound). A large $\kappa$ means effective dilution, which matches our motivation for including the term in the first place. The lower bound ensures that the degradation isn't so fast that the dilution can't take effect.
On the other hand, the more unstable the protein (smaller $\gamma$), the lower the production rate needs to be so that dilution can take effect.
Formally, these are the values I tried: $\alpha = 1.3, \beta = 11, \gamma = 25, \kappa = 3.5$
def stabilizer(inds, deriv):
    """Return the subset of root indices that are stable fixed points.

    A root of dX/dt is stable when the derivative crosses zero with
    negative slope, i.e. deriv is larger just left of the root than just
    right of it.

    Parameters
    ----------
    inds : iterable of int
        Candidate root indices into ``deriv``.
    deriv : sequence of float
        dX/dt sampled on the concentration grid.

    Returns
    -------
    list of int
        Indices from ``inds`` classified as stable.

    Note: boundary indices (0 and len(deriv)-1) are skipped — the original
    code raised IndexError at the right edge and silently wrapped to the
    array end (deriv[-1]) at the left edge.
    """
    n = len(deriv)
    stable_inds = []
    for i in inds:
        if 0 < i < n - 1 and deriv[i - 1] > deriv[i + 1]:
            stable_inds.append(i)
    return stable_inds
# House colors for the notebook's plots.  The original assigned _purple and
# _blue twice each; the first assignments were dead stores (immediately
# overwritten), so only the effective final values are kept.
_black = "#1c2641"   # dark navy: curves and wedge outlines
_purple = "#C5b2Df"  # light purple: all root markers
_blue = "#84B0C2"    # light blue: stable-root markers
# Parameter sliders for the interactive bistability explorer.  Ranges are
# deliberately narrow around the bistable operating point found above
# (α = 1.3, β = 11, γ = 25, κ = 3.5) so the system stays near bistability.
alpha_slider = pn.widgets.FloatSlider(name="α", start=0.9, end=1.38, step=0.01, value=1.3, width=175)
beta_slider = pn.widgets.FloatSlider(name="β", start=10.5, end=11.1, step=0.01, value=11, width=175)
gamma_slider = pn.widgets.FloatSlider(name="γ", start=24.5, end=26.5, step=0.1, value=25, width=175)
kappa_slider = pn.widgets.FloatSlider(name="κ", start=3.3, end=3.7, step=0.01, value=3.5, width=175)
# Purely cosmetic control: rotates/animates the "blinker" wedges drawn
# around stable fixed points in the plot below.
spin_slider = pn.widgets.FloatSlider(name="spin", start=0.0, end=np.pi/2, step=0.01, value=0, width=200)
# Ten short arcs (0.1 rad wide) spaced around each stable root — the
# blinker geometry.
start_angles = np.linspace(0, 2*np.pi, 10)
end_angles = start_angles + 0.1
# Concentration grid on which dX/dt is evaluated and plotted.
X = np.linspace(0, 4, 600)
@pn.depends(alpha_slider.param.value, beta_slider.param.value,
            gamma_slider.param.value, kappa_slider.param.value,
            spin_slider.param.value)
def plotter(alpha, beta, gamma, kappa, spin):
    """Plot dX/dt versus X and mark its fixed points.

    Roots are located as near-zero local minima of |dX/dt|; stable roots
    (negative slope through zero, per ``stabilizer``) get a blue marker
    plus annular "blinker" wedges whose thickness and rotation respond to
    the spin slider.  Returns the styled Bokeh figure.
    """
    deriv = alpha + beta * X / (1 + X) - X - gamma * X / (1 + kappa * X)
    p = bokeh.plotting.figure(height=400, width=550, title="Bistability",
                              x_axis_label="X", y_axis_label="dX/dt")
    p.line(X, deriv, line_width=3, color=_black)
    # Zero reference line so the roots are visible by eye.
    p.line(X, np.zeros(len(X)), color="black", line_width=2, line_dash="dotdash")
    # Roots of dX/dt: peaks of -|dX/dt| that are numerically near zero.
    inds = scipy.signal.find_peaks(-np.abs(deriv))[0]
    inds = inds[np.abs(deriv[inds]) < 0.01]
    # Vectorized indexing replaces the original per-index list comprehension.
    roots = X[inds]
    p.circle(roots, np.zeros(len(roots)), size=11, color=_purple,
             line_color='black', line_width=3)
    stable_inds = stabilizer(inds, deriv)
    roots_stable = X[stable_inds]
    # Wedge thickness depends only on the spin slider, so compute it once
    # (the original recomputed it on every loop iteration).
    out_radius = 0.015 * (np.sin(10 * spin)) ** 3 + 0.025
    for start_angle, end_angle in zip(start_angles, end_angles):
        p.annular_wedge(x=roots_stable,
                        y=np.zeros(len(roots_stable)),
                        line_color=_black,
                        inner_radius=0.1,
                        outer_radius=0.1 + out_radius,
                        start_angle=start_angle + spin,
                        end_angle=end_angle + spin, )
    p.circle(roots_stable, np.zeros(len(roots_stable)),
             size=11, color=_blue,
             line_color='black', line_width=3)
    return style(p)
# Arrange the four parameter sliders in a 2x2 grid, centered above the plot.
lay_widgets = pn.Row(pn.Column(alpha_slider, beta_slider),
pn.Column(gamma_slider, kappa_slider),
align="center")
# Spin control sits below the plot.
lay_spin = pn.Row(spin_slider, align="center")
# Final dashboard: widgets on top, reactive plot, spin control underneath.
pn.Column(lay_widgets, plotter, lay_spin,)
Whenever df/dX is negative at a root of f(X) = dX/dt, that root is a stable fixed point. These points are marked with blinkers.
The bistability here leads me to believe that I would expect a fraction of the colonies to be fluorescing at a high consistent signal, and another fraction to be at a lower consistent signal. This fraction is dependent on the initial concentration of Xo in each cell, as shown below. My initial picture is not completely precise, but more details will be discussed after some plots!
Here, I am plotting trajectories for various initial concentrations of X.
# 35-entry color gradient (dark navy -> purple -> back to navy) used to
# color trajectories by initial condition in the plots below.
palette = ['#1c2641', '#27324f', '#333e5e', '#3f4a6d', '#4b567c',
'#57628b', '#636e9a', '#6f7aa9', '#1c2641', '#3d476a',
'#5e6994', '#808bbe', '#9091c1', '#a097c5', '#b09dc9',
'#bca4cb', '#c8abcd', '#d4b3cf', '#ceafce', '#c8abcd',
'#c2a8cc', '#bba4ca', '#b5a0c9', '#ae9cc8', '#a599c6',
'#9d95c4', '#9492c2', '#8c8fc0', '#838cbe', '#7580b0',
'#636e9a', '#515c83', '#3f4a6d', '#2d3857', '#1c2641']
# Nominal (noise-free) parameter values chosen above for bistability,
# packed for passing to odeint via args=.
alpha, beta, gamma, kappa = 1.3, 11, 25, 3.5
args = (alpha, beta, gamma, kappa)
def deriv(X, t, alpha, beta, gamma, kappa):
    """Right-hand side dX/dt for odeint: leaky self-activating production
    (alpha + beta*X/(1+X)) minus dilution (X) and saturating degradation
    (gamma*X/(1+kappa*X)).  ``t`` is unused but required by odeint."""
    production = alpha + beta * X / (1 + X)
    removal = X + gamma * X / (1 + kappa * X)
    return production - removal
# Time grid for the deterministic integrations.
t = np.linspace(0, 15, 500)
# Initial conditions: a fine sweep of low Xo values plus a coarser sweep
# up to Xo = 5 (the low range is where the bistable threshold lives).
Xos = np.concatenate((np.linspace(0, 0.5, 8), np.linspace(0.5, 5, 30)))
# One trajectory per initial condition; the comprehension replaces the
# original append loop.  .T[0] flattens odeint's (len(t), 1) output.
X_trajs = [list(scipy.integrate.odeint(deriv, Xo, t, args=args).T[0])
           for Xo in Xos]
# Plot every deterministic trajectory, colored by initial condition.
p = bokeh.plotting.figure(height=400, width=500,
title="Deterministic Trajectories",
x_axis_label="dimensionless time",
y_axis_label="dimensionless [X]"
)
for X_traj, color in zip(X_trajs, palette):
    p.line(t, X_traj, color=color, line_width=2.4)
bokeh.io.show(style(p))
We see that in the beginning, there might be some initial distribution or spread of fluorescent signals (depending on how the division time compares to the time it takes the trajectories to reach steady state. If growth time is much longer, then the initial distribution will be quite uniform.) Over time, the fluorescence patterns should become more and more consistent and reflect our two steady states.
As time goes on, I would expect that given exterior limiting conditions, the available Xo building blocks will be "less available" (depending on what Xo is, this might not be the case), the colony would then not be able to sustain such high production, and the larger colonies will start producing daughter cells fluorescing at the lower signal. (Never done this before, so this is all just wishful thinking...)
I also had another thought. Looking at the response times, we see that it takes a little longer for the higher concentration to reach its steady state. What this means to me is that we will have a wider distribution / more noise in the higher fluorescent signal. I think this will be true even if we take the data, classify each signal as HIGH / LOW, and then normalize their concentrations to the peak fluorescence levels (without this normalization, you would likely experience more noise in the higher signal just by virtue of having a larger sample size, and it might not necessarily be due to slower response times). Basically, each measurement is capturing more variation in higher X. The asymmetry in how quickly the peaks rise (faster for larger Xo, longer for Xo around 0.5-1.0) also leads me to believe that this peak will be skewed right. This hypothesis assumes a uniform range of [X], which is definitely not the case, but can still be applied to various initial distributions of Xo.
I wanted to explore how "robust" these stability points are to fluctuations in the parameters. During one of Justin's office hours, he showed how cellular concentrations sometimes resulted in stunningly low counts of the actual species of interest. Though I'm not sure how to properly simulate this (it seems like it requires discretizing my concentrations?), I want to try two other things:
The thinking: given that we have written down a deterministic set of equations, and that finding bistability required some probing of parameter space, a small wiggle factor shouldn't push our system out of bistability, but a larger one might. Let's test this!
def X_trajs_normal(Xos, t, alphas, betas, gammas, kappas):
    """Integrate one trajectory per initial condition, each with its own
    parameter draw.

    Parameters
    ----------
    Xos : sequence of float
        Initial concentrations, one per trajectory.
    t : ndarray
        Time grid passed to odeint.
    alphas, betas, gammas, kappas : sequences of float
        Parameter samples; the i-th trajectory uses the i-th value of each.

    Returns
    -------
    list of list of float
        One trajectory (length len(t)) per initial condition.

    NOTE(review): zip pairs parameters with initial conditions, so each
    trajectory sees *constant* parameters — the callers sample arrays of
    size len(t), but only the first len(Xos) entries are ever used.
    Confirm this matches the intended noise model.
    """
    return [
        list(scipy.integrate.odeint(deriv, Xo, t, args=(a, b, g, k)).T[0])
        for Xo, a, b, g, k in zip(Xos, alphas, betas, gammas, kappas)
    ]
# Shared time grid for the stochastic experiments.
t = np.linspace(0, 15, 500)
# Mean (noise-free) parameter values; noise is applied relative to these.
mu_alpha = 1.3
mu_beta = 11
mu_gamma = 25
mu_kappa = 3.5
# Controls for the Gaussian-noise dashboard: one shared initial
# concentration for the replicate panel, and the relative noise level.
Xo_single_slider = pn.widgets.FloatSlider(name="single Xo", start=0.0, end=5.0, step=0.1, value=0.60, width=300)
noise_percent_slider = pn.widgets.FloatSlider(name="noise %", start=0, end=10, value=2, step=0.1, width=300)
@pn.depends(Xo_single_slider.param.value, noise_percent_slider.param.value)
def normal_stochastic_plotter(Xo_single, noise_percent):
    """Dashboard panel: trajectories under Gaussian parameter noise.

    Draws Gaussian samples of (alpha, beta, gamma, kappa) with standard
    deviation noise_percent% of each mean, then shows three figures:
    50 replicates from one shared Xo, the collective Xo sweep, and the
    noise-free deterministic reference.  Returns a panel Row of the three
    styled figures.
    """
    # A single relative noise level applies to all four parameters (the
    # original assigned four identical sigma variables).
    sig = noise_percent / 100
    # NOTE(review): size=len(t) suggests time-varying noise was intended,
    # but X_trajs_normal zips these against Xos, so each trajectory gets
    # one constant sample and only the first len(Xos) entries are used.
    alphas = np.random.normal(loc=mu_alpha, scale=sig * mu_alpha, size=len(t))
    betas = np.random.normal(loc=mu_beta, scale=sig * mu_beta, size=len(t))
    gammas = np.random.normal(loc=mu_gamma, scale=sig * mu_gamma, size=len(t))
    kappas = np.random.normal(loc=mu_kappa, scale=sig * mu_kappa, size=len(t))
    p = bokeh.plotting.figure(
        height=400, width=500,
        title=f"Stochastic Gaussian: Single Xo = {Xo_single}",
        x_axis_label="dimensionless time",
        y_axis_label="dimensionless [X]",
        y_range=(-0.05, 4.8)
    )
    q = bokeh.plotting.figure(
        height=400, width=500,
        title="Stochastic Gaussian: Collective Xo",
        x_axis_label="dimensionless time",
        y_axis_label="dimensionless [X]",
        y_range=(-0.05, 4.8)
    )
    # 50 replicates started from the same initial concentration.
    Xos_single = [Xo_single] * 50
    X_trajs_single = X_trajs_normal(Xos_single, t, alphas, betas, gammas, kappas)
    # Sweep of initial conditions, matching the deterministic analysis.
    Xos_collective = np.concatenate((np.linspace(0, 0.5, 8), np.linspace(0.5, 5, 30)))
    X_trajs_collective = X_trajs_normal(Xos_collective, t, alphas, betas, gammas, kappas)
    # Plot every replicate.  The original zipped against the 35-color
    # palette, silently dropping 15 of the 50 trajectories (and never
    # used the color anyway — all replicates share palette[0]).
    for X_traj in X_trajs_single:
        p.line(t, X_traj, color=palette[0], line_width=2.1, line_alpha=0.5)
    for X_traj, color in zip(X_trajs_collective, palette):
        q.line(t, X_traj, color=color, line_width=2.1)
    # Deterministic reference at the nominal parameter values.
    X_trajs_deterministic = [
        list(scipy.integrate.odeint(deriv, Xo, t,
                                    args=(mu_alpha, mu_beta, mu_gamma, mu_kappa)).T[0])
        for Xo in Xos_collective
    ]
    r = bokeh.plotting.figure(height=400, width=500,
                              title="Deterministic Trajectories",
                              x_axis_label="dimensionless time",
                              y_axis_label="dimensionless [X]"
                              )
    for X_traj, color in zip(X_trajs_deterministic, palette):
        r.line(t, X_traj, color=color, line_width=2.4)
    return pn.Row(style(p), style(q), style(r))
# Gaussian-noise dashboard: noise control on top, reactive plots in the
# middle, single-Xo control below.
pn.Column(pn.Row(pn.Spacer(width=300), noise_percent_slider),
normal_stochastic_plotter,
pn.Row(pn.Spacer(width=50), Xo_single_slider)
)
We see from the left plot that around the "cutoff" region, we get bistability that is not deterministic based on its initial concentration (whereas in the deterministic case, this threshold is sharp). This helped clarify how those animated red dots in Rong's presentation were able to move from one fixed point to another. This effect is further dramatized in the middle plot, where the ordering of the colors is not preserved in the regime of high noise.
It is interesting that as the point travels through the vector field, it grows more and more robust to noise. We also see that noise has the habit of slowing down response times, or making for sticky trajectories in the collective response (the sigmoidal curve can often be delayed.)
Ideally I would like to implement perlin noise, but let's see what sine functions do and call it a night!
# Controls for the sinusoidal-noise dashboard.
Xo_single_slider_sin = pn.widgets.FloatSlider(name="single Xo", start=0.0, end=5.0, step=0.1, value=0.30, width=300)
noise_percent_slider_sin = pn.widgets.FloatSlider(name="noise amplitude", start=0, end=10, value=5, step=0.1, width=300)
# Frequency and phase offsets decouple the oscillations of the four
# parameters from one another.
freq_offset_slider_sin = pn.widgets.FloatSlider(name="frequency", start=0.0, end=10.0, value=1.0, width=140)
phase_offset_slider_sin = pn.widgets.FloatSlider(name="phase offset", start=0.0, end=2*np.pi, value=5.0, width=140)
# Integration window for the single-Xo panel.
time_range_slider_sin = pn.widgets.RangeSlider(name="time range", start=0.0, end=50, value=(0.0, 15.0))
@pn.depends(Xo_single_slider_sin.param.value, noise_percent_slider_sin.param.value,
            freq_offset_slider_sin.param.value, phase_offset_slider_sin.param.value,
            time_range_slider_sin.param.value)
def sinusoidal_stochastic_plotter(Xo_single, noise_percent, freq_offset, phase_offset, time_range):
    """Dashboard panel: trajectories under sinusoidal parameter modulation.

    Each parameter oscillates around its mean with relative amplitude
    noise_percent% and a parameter-specific phase offset; np.abs keeps
    the sampled values non-negative.  Shows 50 single-Xo replicates
    (integrated over the selected time window), the collective Xo sweep,
    and the noise-free deterministic reference.
    """
    # NOTE(review): these arrays are built on the *global* t grid even
    # though the single-Xo panel integrates over _t below, and
    # X_trajs_normal uses one constant sample per trajectory — confirm
    # this is the intended noise model.
    alphas = np.abs(noise_percent/100*mu_alpha * np.sin(t) + mu_alpha)
    betas = np.abs(noise_percent/100*mu_beta * np.sin(freq_offset*t+0.5*phase_offset) + mu_beta)
    gammas = np.abs(noise_percent/100*mu_gamma * np.sin(freq_offset*t+1.0*phase_offset) + mu_gamma)
    kappas = np.abs(noise_percent/100*mu_kappa * np.sin(freq_offset*t+1.5*phase_offset) + mu_kappa)
    p = bokeh.plotting.figure(
        height=400, width=500,
        title=f"Stochastic Sinusoid: Single Xo = {Xo_single}",
        x_axis_label="dimensionless time",
        y_axis_label="dimensionless [X]",
        y_range=(-0.05, 4.8)
    )
    q = bokeh.plotting.figure(
        height=400, width=500,
        title="Stochastic Sinusoid: Collective Xo",
        x_axis_label="dimensionless time",
        y_axis_label="dimensionless [X]",
        y_range=(-0.05, 4.8)
    )
    # 50 replicates from one Xo, integrated over the user-selected window.
    Xos_single = [Xo_single] * 50
    _t = np.linspace(time_range[0], time_range[1], 500)
    X_trajs_single = X_trajs_normal(Xos_single, _t, alphas, betas, gammas, kappas)
    # Sweep of initial conditions on the default time grid.
    Xos_collective = np.concatenate((np.linspace(0, 0.5, 8), np.linspace(0.5, 5, 30)))
    X_trajs_collective = X_trajs_normal(Xos_collective, t, alphas, betas, gammas, kappas)
    # Plot every replicate.  The original zipped against the 35-color
    # palette, silently dropping 15 of the 50 trajectories (and never
    # used the color — all replicates share palette[0]).
    for X_traj in X_trajs_single:
        p.line(_t, X_traj, color=palette[0], line_width=2.1, line_alpha=0.5)
    for X_traj, color in zip(X_trajs_collective, palette):
        q.line(t, X_traj, color=color, line_width=2.4)
    # Deterministic reference at the nominal parameter values.
    X_trajs_deterministic = [
        list(scipy.integrate.odeint(deriv, Xo, t,
                                    args=(mu_alpha, mu_beta, mu_gamma, mu_kappa)).T[0])
        for Xo in Xos_collective
    ]
    r = bokeh.plotting.figure(height=400, width=500,
                              title="Deterministic Trajectories",
                              x_axis_label="dimensionless time",
                              y_axis_label="dimensionless [X]"
                              )
    for X_traj, color in zip(X_trajs_deterministic, palette):
        r.line(t, X_traj, color=color, line_width=2.4)
    return pn.Row(style(p), style(q), style(r))
# Sinusoidal-noise dashboard: amplitude/frequency/phase controls above the
# plots, Xo and time-range controls below.
top_widgets = pn.Column(
pn.Row(pn.Spacer(width=300), noise_percent_slider_sin),
pn.Row(pn.Spacer(width=300), freq_offset_slider_sin, phase_offset_slider_sin)
)
bottom_widgets = pn.Column(
pn.Row(pn.Spacer(width=50), Xo_single_slider_sin),
pn.Row(pn.Spacer(width=50), time_range_slider_sin)
)
dashboard = pn.Column(top_widgets, sinusoidal_stochastic_plotter, bottom_widgets)
dashboard
What this shows is that the mechanism by which we introduce noise matters.
Some observations:
(noise amplitude: 8.20, phase offset: 5.90) Decoupling the phases can sometimes map low concentrations to high stable states, and high to low. Weird! Overall, I am not entirely sure what to make of these results and how they correspond to real systems. At first I thought, oh well, as long as the perturbations don't bother the inequality, things will be good, but then I realized the second condition for bistability — the sign condition on f′(X), where f(X) = dX/dt — also needs to hold, and this imposes its own set of constraints we need to satisfy as well. I think altogether it demonstrates how bistability can endure, albeit less deterministically.